2-Layer Network

Here we will build a new multi-layer network with two hidden layers.


In [2]:
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
from IPython.html.widgets import interact
from sklearn.datasets import load_digits
import core
digits = load_digits()


:0: FutureWarning: IPython widgets are experimental and may change in the future.
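Note: in newer IPython/Jupyter versions the widgets live in the separate ipywidgets package (the equivalent import there is `from ipywidgets import interact`), which is why the line above raises a FutureWarning. Since interact isn't actually used in this section, the warning can safely be ignored here.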

In [9]:
donnel_v2 = core.gen_network([64,40,40,10])
donnel_v2


Out[9]:
[array([[ 0.32846743, -1.96359467, -0.15075226, ..., -0.25357411,
          1.11221684, -1.29188974],
        [ 0.3199814 , -1.052386  , -0.29878055, ..., -1.79850139,
          0.56462983, -0.81573669],
        [ 0.21228989, -0.28899109,  0.5092346 , ...,  0.10643621,
          0.55327706,  0.1074506 ],
        ..., 
        [ 1.83570395,  0.98652479, -0.96111193, ...,  1.13643729,
          2.35299681,  1.06908648],
        [ 0.07285215,  1.88061704,  1.16656584, ...,  1.31852513,
          0.45805942, -1.09094087],
        [-0.59370075,  1.32065226, -2.3923606 , ..., -0.72139058,
          0.13100103, -0.01504264]]),
 array([[-0.2426794 ,  0.2779744 , -1.53428502, ..., -0.26392914,
          0.5144742 ,  1.54977008],
        [-0.71215878, -0.65155738,  0.25193419, ...,  1.31714479,
          0.24664636, -0.12155331],
        [ 0.07025812,  0.07422465, -0.80381639, ..., -0.2046746 ,
         -0.0405329 ,  0.09818016],
        ..., 
        [-0.52319593,  2.58619785, -1.63600246, ..., -0.65921825,
         -1.88516664,  0.03177858],
        [-2.58008895,  0.04152083, -0.20023133, ..., -0.55092476,
         -0.21782289, -0.71590553],
        [ 1.38949415,  0.15528489, -0.58149334, ..., -0.67888148,
          1.11982539, -0.95647439]]),
 array([[ 0.16263657,  0.91424599,  1.13742118,  1.74071678, -2.55829013,
         -1.46681451, -0.64985903, -0.12716168, -0.48670035,  1.38104058,
          2.7544517 , -0.2009587 , -0.37037801, -0.06786982,  0.73303556,
          0.26229759, -0.38329468, -0.35781205,  1.24135359, -0.60653345,
         -0.02319707, -2.48749007,  0.52132404, -0.73385465, -0.63395323,
         -0.46773027, -0.68888403, -0.69972369,  0.93665977, -0.51745825,
         -1.3858258 ,  0.12560803, -0.1067816 , -1.0294455 ,  2.15518616,
          2.08067421,  0.27245623,  0.17669023,  1.62774711,  0.13771729,
          1.20917446],
        [-0.67034316,  0.09600751,  0.96387226,  0.60199442,  0.26261588,
         -1.15189942,  0.79729241, -1.15440419, -1.29350702, -0.2419184 ,
          0.23790095, -0.9284589 ,  1.25500781, -0.02688044,  0.94508322,
          0.15548582, -0.68510578,  0.3172127 ,  0.37227119, -0.54574034,
         -0.32197777, -1.61046868,  0.21444021, -0.77456397, -1.01284335,
         -0.43770686,  3.08278427,  0.68434671, -0.7125238 , -0.29247013,
         -1.79355183, -1.04194257, -0.3602208 , -0.98475666,  1.67885485,
         -0.65473819,  1.16477907,  1.25054033, -0.24972943,  0.52734868,
          0.36439159],
        [-0.19839662, -1.37143543, -0.30045918,  0.44679423, -0.24146499,
          1.96575303, -1.8022335 ,  1.094329  ,  0.84144499, -0.87154609,
         -1.30408131, -0.57781522, -0.06840441,  0.64017778,  0.78616605,
          0.10340949,  1.3282156 , -1.56665603,  0.19268101, -0.05423892,
          0.50066696,  1.80378761, -1.37857547, -0.23919402, -0.99382315,
         -1.94078752, -0.5431362 , -0.32484614,  0.7597845 ,  0.81189577,
         -1.450231  ,  2.62611943, -0.25265134,  0.43008205,  1.00506105,
         -1.77420807, -0.38977733,  0.08500265,  1.31140473,  1.78226748,
         -2.43284893],
        [-0.77224579, -1.25170389,  1.09408227,  0.41153966, -0.95052032,
          0.81501564,  1.35572033, -1.45258557, -1.65862588,  0.23652454,
          0.49385586, -0.32505524, -0.05520534, -0.41603909, -0.66170467,
          0.4287801 ,  0.22984786, -1.63070013, -0.82093121,  1.15284666,
         -1.04486415,  0.54956132, -0.52788793,  1.01305997, -0.26265085,
         -0.04227386,  0.72047883,  0.87854637,  0.24408909,  0.57287612,
         -0.13881151, -0.1861133 , -0.45721636, -1.63446292, -0.47859945,
         -0.86655487, -0.28209455, -0.90541745, -0.03098719,  0.68570902,
          1.18202855],
        [ 1.54251735, -1.50916357,  0.93014858, -0.34456976,  0.45830045,
          0.02600115,  0.26221441,  0.78244065,  0.62022239,  0.896261  ,
         -0.11068721, -0.80847762, -2.0479468 ,  0.42271772, -0.86538586,
         -1.90963403, -0.97706442,  0.37351016,  2.13150143, -1.60511425,
          1.09883231, -0.08546919, -1.03354789, -0.6776375 , -0.58794362,
          1.08395188, -0.0822178 , -1.26694559, -0.03485559, -1.00261543,
         -0.84635026,  1.85099191, -0.61707132, -0.03245158, -0.8248214 ,
         -0.2972787 , -0.97759881, -0.06250537, -1.02871556, -0.15843845,
          0.25244954],
        [-0.31188287, -0.09970305, -1.13971691,  2.46265779, -1.47653391,
          0.59573971,  1.23068617, -0.08608049, -1.30698553, -0.5240878 ,
         -1.12818919, -0.5036389 ,  0.48595562,  0.13149648,  0.26786922,
          0.27195407, -0.9645846 ,  2.07094018, -0.59817413, -0.01175252,
         -1.3999479 ,  1.02865664, -0.16495601, -0.35564641,  1.87740499,
          0.13464939, -0.7218325 ,  0.5420088 , -0.65915999,  1.41129567,
          0.65998343,  2.72382002,  0.67523153, -0.25497421, -0.16835748,
          0.68466288,  0.8386734 , -0.47679114,  1.391245  ,  0.86756932,
         -0.55799163],
        [-0.92035311,  0.03925017,  0.93630299, -0.10142119,  0.73056749,
         -0.35976935,  0.65295485, -0.11110665,  1.52065071,  0.06311637,
          1.78080246,  0.12282167, -1.47419892, -0.09512844, -1.76183776,
         -0.33701845,  1.43121633,  1.43122797,  0.84202674,  0.68473493,
          0.61004166,  0.2081037 , -1.30870952,  1.70892752, -0.78997687,
          0.27739381, -0.86814729, -0.52843329,  0.33524266,  0.46214174,
         -0.34115363, -0.80770991,  0.05761943, -0.59206494, -0.93826701,
         -0.82888856,  0.14857603,  0.23205435, -0.36581105, -0.88720463,
         -0.68452456],
        [ 2.2151989 ,  0.00658809,  0.47865721,  1.69803884,  0.67179289,
          1.7121521 ,  1.17345467,  1.76778512,  0.42018506, -0.58203238,
          0.22286018,  0.93499516,  1.64475751, -0.79778242, -0.5138233 ,
         -0.80031112,  0.9999573 , -0.07989157,  0.03895524,  0.63256064,
         -0.72601699, -1.11942659,  0.2288111 , -0.37943777, -0.74846599,
         -0.64869199,  0.75773755,  2.29572776,  1.70966083, -1.37173543,
         -1.82025491, -0.23852752, -0.06303521,  0.80460207,  0.6018836 ,
          0.28668966,  1.36583877, -1.55726982, -1.98576608,  0.46410517,
          1.42699915],
        [-0.41376228,  1.77948651, -1.06319075,  1.32957176,  1.58793764,
          1.8288176 , -1.51945341, -0.1634099 , -1.24755256, -0.73358053,
          0.0713808 , -0.99740837, -1.0169661 , -0.22663953,  0.84674476,
         -1.22428057,  1.62796181, -0.22541212,  1.4147017 ,  1.35982764,
          0.38804322, -0.95083489, -0.14563509, -2.42760587,  1.41728788,
          0.42656683, -0.24287271,  0.00352632,  0.04955536,  0.46485001,
         -1.05941616, -0.23674335,  2.88247842,  0.11808247, -0.03450911,
         -1.01754648, -0.26489809, -0.86153283, -0.0417403 ,  2.04857906,
         -0.861519  ],
        [ 1.59688467,  0.17712108,  1.21791543, -1.36970494, -0.54594946,
         -0.8145645 , -1.52268968, -0.72626604,  0.84679262,  0.75429198,
          0.21772121,  0.29640935,  0.76226166,  1.22833868, -0.30982771,
         -0.18619481, -0.80420535,  0.77533202,  2.10091271,  0.14929849,
          0.26348426, -0.13439598, -0.71113259,  0.01882423, -0.88177921,
         -1.36797767,  1.53635819, -0.79736728,  0.22940347,  0.2585205 ,
          0.64429907,  0.54957948, -1.36198787,  1.51223367, -1.12745475,
          0.24422117, -1.12800055,  1.28664116,  0.78149527,  0.78813608,
          2.56478464]])]

So Donnel_v2 takes a 64-value input, each of his two hidden layers has 40 neurons, and his output layer has 10 neurons, one for each digit.
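The core module's source isn't shown in this notebook, so as a point of reference, here is a minimal sketch of what a gen_network-style helper could look like. It assumes standard-normal weights and an extra bias column per layer, which is what the printed values and the 10 x 41 shape of the last array suggest; the real core.gen_network may differ.


In [ ]:
import numpy as np

def gen_network_sketch(layer_sizes):
    """Return one randomly initialized weight matrix per layer transition."""
    # Each matrix maps a layer of n_in neurons (plus an assumed bias column)
    # onto the next layer of n_out neurons.
    return [np.random.randn(n_out, n_in + 1)
            for n_in, n_out in zip(layer_sizes[:-1], layer_sizes[1:])]

print([w.shape for w in gen_network_sketch([64, 40, 40, 10])])
# -> [(40, 65), (40, 41), (10, 41)]


Either way, printing [w.shape for w in donnel_v2] is a quicker way to confirm the 64 -> 40 -> 40 -> 10 topology than reading the raw dump above.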

Now, let's train the network on 80% of the digits data set.

Warning: This Cell Takes Over 40 Minutes to Run


In [10]:
%%timeit -r1 -n1
core.train_network(donnel_v2, digits.data, digits.target, 20, 1438, 15, 0.1)


1 loops, best of 1: 42min 36s per loop
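The 1438 passed to train_network above appears to be the cutoff between training and test samples; it corresponds to roughly 80% of the data set. A quick sanity check, assuming the standard 1,797-image size of sklearn's load_digits:


In [ ]:
n_samples = len(digits.data)    # 1797 images in load_digits
cutoff = int(0.8 * n_samples)   # 1437, within one sample of the 1438 used above
print(n_samples, cutoff)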

And let's check how accurate it is by testing it with the remaining 20% of the data set.


In [12]:
# Test on the held-out samples (indices 1438-1796, the last 20% of the data set).
test_range = range(1438, 1797)
guesses = []
targets = []
for n in test_range:
    # Forward-propagate one image and read the predicted digit off the output layer.
    guesses.append(core.output_reader(core.propforward(donnel_v2, digits.data[n])[-1]))
    targets.append(digits.target[n])

number_correct = sum(1 for guess, target in zip(guesses, targets) if guess == target)
number_total = len(test_range)
print(number_correct / number_total * 100)
print("%d/%d" % (number_correct, number_total))


80.22284122562674
288/359
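As a cross-check, the same accuracy can be computed with scikit-learn's accuracy_score, reusing the guesses and targets lists built above; it should reproduce the 80.22% figure.


In [ ]:
from sklearn.metrics import accuracy_score

# Fraction of test digits classified correctly, expressed as a percentage.
print(accuracy_score(targets, guesses) * 100)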

So as you can see, even though our network "Donnel_v2" has an extra hidden layer compared to his older brother, he is no more effective at identifying digits, despite taking nearly twice as long to train.


In [ ]: